c7cfd070c6619d9537be16327373799b7e8196be,cdap-watchdog/src/main/java/co/cask/cdap/metrics/data/TimeSeriesTable.java,TimeSeriesTable,getAvailableContextAndMetrics,#MetricsScanQuery#boolean#,177
Before Change
byte[] startRow = entityCodec.paddedEncode(query.getContextPrefix(), query.getMetricPrefix(), tagPrefix,
startTimeBase, query.getRunId(), 0);
byte[] endRow = entityCodec.paddedEncode(query.getContextPrefix(), query.getMetricPrefix(), tagPrefix,
endTimeBase + 1, query.getRunId(), 0xff);
Row rowResult;
// performs multiple scans, but skips duplicate row keys by incrementing
// the sub-context or metrics part of the row key before each scan.
do {
FuzzyRowFilter filter = getFilter(query, startTimeBase, endTimeBase + 1, false, FOUR_ONE_BYTES);
ScannerFields fields = new ScannerFields(startRow, endRow, columns, filter);
Scanner scanner = null;
try {
scanner = timeSeriesTable.scan(fields.startRow, fields.endRow, fields.columns, fields.filter);
} catch (Exception e) {
throw new OperationException(StatusCode.INTERNAL_ERROR, e.getMessage(), e);
}
rowResult = scanner.next();
if (rowResult != null) {
byte[] rowKey = rowResult.getRow();
// Decode context and metric from key
int offset = 0;
String contextStr = entityCodec.decode(MetricsEntityType.CONTEXT, rowKey, offset);
// Always have a "." suffix for unique matching
if (query.getContextPrefix() != null && !(contextStr + ".").startsWith(query.getContextPrefix())) {
scanner.close();
break;
}
offset += entityCodec.getEncodedSize(MetricsEntityType.CONTEXT);
String metricName = entityCodec.decode(MetricsEntityType.METRIC, rowKey, offset);
if (isContextQuery) {
// update the rowkey, by incrementing the next-level context part
ByteBuffer contextPart = ByteBuffer.wrap(rowKey);
contextPart.limit(entityCodec.getEncodedSize(MetricsEntityType.CONTEXT));
contextPart = getNextContextTarget(contextPart, query.getContextPrefix());
if (contextPart == null) {
scanner.close();
break;
}
//update the start-row to be row-key for next scan
startRow = rowKey;
metricsScanResults.add(contextStr);
} else {
// update the rowkey, by incrementing the metrics part
ByteBuffer metricsPart = ByteBuffer.wrap(rowKey);
metricsPart.position(entityCodec.getEncodedSize(MetricsEntityType.CONTEXT));
metricsPart.limit(entityCodec.getEncodedSize(MetricsEntityType.CONTEXT) +
entityCodec.getEncodedSize(MetricsEntityType.METRIC));
metricsPart = stopKeyForPrefix(metricsPart);
if (metricsPart == null) {
scanner.close();
break;
}
//update the start-row to be row-key for next scan
startRow = rowKey;
metricsScanResults.add(metricName);
}
}
// no more matching rows found
After Change
// initialize metrics-offset and length of metrics entity type to obtain
// all available metrics in the given context
metricsOffset = entityCodec.getEncodedSize(MetricsEntityType.CONTEXT);
metricsLength = entityCodec.getEncodedSize(MetricsEntityType.METRIC);
}
byte[] startRow = entityCodec.paddedEncode(query.getContextPrefix(), query.getMetricPrefix(), tagPrefix,
startTimeBase, query.getRunId(), 0);
byte[] endRow = entityCodec.paddedEncode(query.getContextPrefix(), query.getMetricPrefix(), tagPrefix,
endTimeBase, query.getRunId(), 0xff);
Row rowResult;
// Performs multiple scans, incrementing the scan's startRow row key each time to reach the next
// unique context part or the next unique metric name, depending on the isContextQuery parameter.
// This lets us skip over runs of duplicate rows and jump directly to the next
// unique context/metric part we are interested in.
// We stop when a returned row no longer matches the given contextPrefix, or when the scan
// returns no more rows.
do {
FuzzyRowFilter filter = getFilter(query, -1, -1, false);
ScannerFields fields = new ScannerFields(startRow, endRow, columns, filter);
Scanner scanner = null;
try {
scanner = timeSeriesTable.scan(fields.startRow, fields.endRow, fields.columns, fields.filter);
} catch (Exception e) {
throw new OperationException(StatusCode.INTERNAL_ERROR, e.getMessage(), e);
}
rowResult = scanner.next();
if (rowResult != null) {
byte[] rowKey = rowResult.getRow();
int offset = 0;
String contextStr = entityCodec.decode(MetricsEntityType.CONTEXT, rowKey, offset);
if (query.getContextPrefix() != null && !contextStr.startsWith(query.getContextPrefix())) {
// if retrieved rowkey's contextPrefix does not match with the contextPrefix in query,
// we stop scanning and return
scanner.close();
break;
}
offset += entityCodec.getEncodedSize(MetricsEntityType.CONTEXT);
if (isContextQuery) {
metricsScanResults.add(contextStr);
// With the next scan we fast-forward to the next row that has different context name
// that we are searching for.
startRow = getNextRow(rowKey, contextOffset, contextLength);
if (startRow == null) {
//reached max possible key for the context, we will stop scanning now and return.
scanner.close();
break;
}
} else {
String metricName = entityCodec.decode(MetricsEntityType.METRIC, rowKey, offset);
metricsScanResults.add(metricName);
// With the next scan we fast-forward to the next row that has different metric name part
// that we are searching for.
startRow = getNextRow(rowKey, metricsOffset, metricsLength);